In [ ]:
import folium
from branca.element import Figure

def show_route(df):
    """Display the GPS track in ``df`` (needs 'lat'/'lon' columns) on a hiking map."""
    route_map = folium.Map(
        scrollWheelZoom=False,
        attr='Map data: © OpenStreetMap',
        tiles='http://maps.refuges.info/hiking/{z}/{x}/{y}.png',
    )

    coords = df[['lat', 'lon']].values
    folium.PolyLine(coords).add_to(route_map)
    # Zoom/pan so the whole track is visible.
    route_map.fit_bounds(coords.tolist())

    # Wrap the map in a fixed-height Figure so it renders compactly inline.
    frame = Figure(width='100%', height=300)
    frame.add_child(route_map)
    display(frame)
In [1]:
from IPython.core.display import HTML
def configure_plotly_browser_state():
    """Inject a require.js config so Plotly figures render inline (Colab workaround).

    Registers the Plotly CDN bundle with requirejs; harmless outside Colab.
    Fix: dropped the dead ``import IPython`` — the function only uses the
    notebook built-in ``display`` and the ``HTML`` imported above.
    """
    display(HTML('''
        <script src="/static/components/requirejs/require.js"></script>
        <script>
          requirejs.config({
            paths: {
              base: '/static/base',
              plotly: 'https://cdn.plot.ly/plotly-latest.min'
            },
          });
        </script>
        '''))

configure_plotly_browser_state()
In [ ]:
import torch
import numpy as np
from data_processor import create_sliding_windows

# Sliding-window length (number of samples per model input sequence).
TIME_STEPS = 64

def predict_with_pytorch_model(model, df_p, FEATURES, TIME_STEPS, batch_size=64, device='cuda'):
    """Run sliding-window inference and write per-row averaged predictions.

    Args:
        model: trained ``torch.nn.Module``; called on batches of shape
            (batch, TIME_STEPS, n_features).
            # assumes model output shape is (batch, TIME_STEPS, 1) -- TODO confirm
        df_p: feature DataFrame; mutated in place (gains an
            'expected_heart_rate' column) and also returned.
        FEATURES: feature names; FEATURES[0] is the target and is excluded
            from the model input.
        TIME_STEPS: sliding-window length in rows.
        batch_size: inference batch size.
        device: torch device (string or ``torch.device``) to run on.

    Returns:
        (df_p, predictions): the augmented frame and the raw per-window
        predictions as produced by the model.
    """
    # Evaluation mode: freezes dropout / batch-norm behavior for inference.
    model.eval()

    # Build overlapping input windows from every feature except the target.
    pred_inp_data = create_sliding_windows(df_p[FEATURES[1:]].values, TIME_STEPS, sampling_ratio=1)
    pred_inp_tensor = torch.FloatTensor(pred_inp_data)

    pred_dataset = torch.utils.data.TensorDataset(pred_inp_tensor)
    # shuffle=False keeps window order aligned with row positions in df_p.
    pred_loader = torch.utils.data.DataLoader(pred_dataset, batch_size=batch_size, shuffle=False)

    predictions = []
    # No gradients needed for inference; saves memory and time.
    with torch.no_grad():
        model = model.to(device)
        for (batch_x,) in pred_loader:
            batch_pred = model(batch_x.to(device))
            predictions.append(batch_pred.cpu().numpy())

    predictions = np.concatenate(predictions, axis=0)

    # Average overlapping window predictions per original row.
    # Vectorized replacement for the former O(n_windows * TIME_STEPS) Python
    # double loop: window i covers rows i .. i+TIME_STEPS-1.
    n_rows = len(df_p)
    aggregated_predictions = np.zeros(n_rows)
    counts = np.zeros(n_rows)
    row_idx = np.arange(len(predictions))[:, None] + np.arange(TIME_STEPS)[None, :]
    valid = row_idx < n_rows  # windows near the end may run past the frame
    np.add.at(aggregated_predictions, row_idx[valid], predictions[:, :, 0][valid])
    np.add.at(counts, row_idx[valid], 1)

    # Rows never covered by any window keep 0 instead of dividing by zero.
    mask = counts > 0
    aggregated_predictions[mask] = aggregated_predictions[mask] / counts[mask]

    df_p['expected_heart_rate'] = aggregated_predictions

    return df_p, predictions
In [ ]:
import pandas as pd
import plotly.express as px
import plotly.graph_objects as go

from data_processor import scaling_factors

def plot_metrics(df, metrics, height=250, point_size=2, legend_marker_size=40):
    """Plot scaled metric traces over a shaded altitude profile.

    Args:
        df: DataFrame with an 'altitude' column plus any of the requested
            metric columns (metrics missing from ``df`` are skipped).
        metrics: column names to plot, scaled via ``scaling_factors``.
        height: figure height in pixels.
        point_size: marker size for the metric scatter points.
        legend_marker_size: width of legend items.

    Fixes vs. previous version: removed the ``plot_data`` frame that was grown
    with ``pd.concat`` in a loop but never used; altitude de-normalization is
    computed once instead of three times; the color palette wraps around
    instead of raising IndexError when there are more metrics than colors.
    """
    fig = go.Figure()

    # Invert the altitude normalization back to metres.
    # assumes altitude was stored as log1p(metres)/log(8001) -- TODO confirm
    altitude_m = np.expm1(df['altitude'] * np.log(8001))

    # Plot altitude as filled area first (so it's in background)
    fig.add_trace(
        go.Scatter(
            x=df.index,
            y=altitude_m,
            name='altitude',
            fill='tozeroy',
            mode='none',
            line=dict(width=0),
            fillcolor='rgba(200, 200, 200, 0.2)',  # Light grey with transparency
            yaxis='y2',
        )
    )

    colors = px.colors.qualitative.Plotly  # Get a color sequence
    for color_idx, metric in enumerate(metrics):
        if metric in df:
            scaled_values = df[metric] * scaling_factors.get(metric, 1)
            fig.add_trace(
                go.Scatter(
                    x=df.index,
                    y=scaled_values,
                    name=metric,
                    mode='markers',
                    # Modulo so a long metric list cycles through the palette.
                    marker=dict(size=point_size, color=colors[color_idx % len(colors)]),
                )
            )

    # Update legend symbol size
    fig.update_layout(
        template='plotly_dark',
        height=height,
        xaxis_title='',
        yaxis_title='',
        margin=dict(l=4, r=4, t=4, b=1),
        legend=dict(
            orientation="h",
            yanchor="bottom",
            y=-0.3,
            xanchor="center",
            x=0.5,
            title=dict(text=""),
            itemsizing='constant',  # Makes legend items constant size
            itemwidth=legend_marker_size  # Controls the size of legend symbols
        ),
        yaxis2=dict(
            overlaying='y',
            side='right',
            showgrid=False,
            zeroline=False,
            range=[altitude_m.min(), altitude_m.max()]
        ),
    )
    fig.show()

7 peaks with Brian where HR spiked up in the early part

In [ ]:
from fitfile_processor import read_fit
from data_processor import augment_df, normalize_df

# NOTE(review): hardcoded absolute path — consider a DATA_DIR config constant
# so the notebook runs on other machines.
df_p = read_fit('/Users/louka/Documents/training/2024-11-30-04-28-30.fit') \
    .set_index('timestamp').sort_index()

augment_df(df_p)
normalize_df(df_p)

# `model` must be defined by an earlier cell — not visible here; TODO confirm.
df_p, predictions = predict_with_pytorch_model(
    model=model,
    df_p=df_p,
    FEATURES=('heart_rate,power,cadence_ma_5,speed_ma_5,vertical_oscillation,' + \
        'altitude,temperature,grade,effort_score').split(','),
    TIME_STEPS=TIME_STEPS,
    batch_size=64,

    # Bug fix: `is_available` is a function — without the call parentheses the
    # bound method is always truthy, so 'mps' was selected even on machines
    # without Metal support.
    device=torch.device('mps' if torch.backends.mps.is_available() else 'cpu')
)
In [ ]:
# Render the activity's GPS track on the hiking base map.
show_route(df_p)
In [ ]:
# Compare actual vs. model-predicted heart rate alongside cadence/temperature.
plot_metrics(df_p, ['heart_rate', 'expected_heart_rate', 'cadence', 'temperature'], height=350)